In [ ]:
import numpy as np  # linear algebra
import pandas as pd  # data processing, CSV file I/O (e.g. pd.read_csv)

import os
# Show one sample file from each directory under /kaggle/input so the
# available datasets are visible without flooding the output.
for dirname, _, filenames in os.walk('/kaggle/input'):
    if filenames:
        print(os.path.join(dirname, filenames[0]))
/kaggle/input/testssss/testdata.csv
/kaggle/input/testssss/test_crop_image/wheat-field03.jpg
/kaggle/input/agriculture-crop-images/Crop_details.csv
/kaggle/input/agriculture-crop-images/kag2/rice/rice005arot.jpeg
/kaggle/input/agriculture-crop-images/kag2/maize/maize034a.jpeg
/kaggle/input/agriculture-crop-images/kag2/jute/jute014a.jpeg
/kaggle/input/agriculture-crop-images/kag2/wheat/wheat016arot.jpeg
/kaggle/input/agriculture-crop-images/kag2/sugarcane/sugarcane007ahs.jpeg
/kaggle/input/agriculture-crop-images/crop_images/rice/rice017a.jpeg
/kaggle/input/agriculture-crop-images/crop_images/maize/maize034a.jpeg
/kaggle/input/agriculture-crop-images/crop_images/jute/jute014a.jpeg
/kaggle/input/agriculture-crop-images/crop_images/wheat/wheat036a.jpeg
/kaggle/input/agriculture-crop-images/crop_images/sugarcane/sugarcane0001a.jpeg
In [2]:
%matplotlib inline
import warnings
warnings.filterwarnings('ignore')  # silence library deprecation chatter
from sklearn.preprocessing import OneHotEncoder
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
import seaborn as sns
import tensorflow as tf 
import keras 
from keras.layers import Conv2D
from keras.models import Sequential
from keras.layers import MaxPool2D
from keras.layers import Flatten
from keras.layers import Dense
from keras.models import Model
from keras.preprocessing.image import ImageDataGenerator
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score
import cv2
import re
import random
# NOTE(review): Conv2D, Sequential, MaxPool2D, Flatten, Model,
# ImageDataGenerator and `re` appear unused in the rest of this notebook —
# confirm before removing.
# Seed both RNGs so shuffling/initialisation is reproducible across runs.
random.seed(0)
np.random.seed(0)

Plotting some images¶

In [3]:
# Load one representative image per crop class.
wheat = plt.imread("/kaggle/input/agriculture-crop-images/kag2/wheat/wheat0004a.jpeg")
jute = plt.imread("/kaggle/input/agriculture-crop-images/kag2/jute/jute005a.jpeg")
cane = plt.imread("/kaggle/input/agriculture-crop-images/kag2/sugarcane/sugarcane0010arot.jpeg")
rice = plt.imread("/kaggle/input/agriculture-crop-images/kag2/rice/rice032ahs.jpeg")
maize = plt.imread("/kaggle/input/agriculture-crop-images/kag2/maize/maize003a.jpeg")

# Show all five samples side by side, in the same left-to-right order as the
# class encoding used later (jute, maize, rice, sugarcane, wheat).
samples = [("jute", jute), ("maize", maize), ("rice", rice),
           ("sugarcane", cane), ("wheat", wheat)]
plt.figure(figsize=(20, 3))
for pos, (name, img) in enumerate(samples, start=1):
    plt.subplot(1, 5, pos)
    plt.imshow(img)
    plt.title(name)
Out[3]:
Text(0.5, 1.0, 'wheat')
No description has been provided for this image
In [4]:
# Per-class image directories under the kag2 dataset root.
KAG2_ROOT = "../input/agriculture-crop-images/kag2"
jutepath = KAG2_ROOT + "/jute"
maizepath = KAG2_ROOT + "/maize"
ricepath = KAG2_ROOT + "/rice"
sugarcanepath = KAG2_ROOT + "/sugarcane"
wheatpath = KAG2_ROOT + "/wheat"

# File names (not full paths) of every image in each class directory.
jutefilename = os.listdir(jutepath)
maizefilename = os.listdir(maizepath)
ricefilename = os.listdir(ricepath)
sugarcanefilename = os.listdir(sugarcanepath)
wheatfilename = os.listdir(wheatpath)

# Accumulator for (image path, integer label) pairs.
X = []
In [5]:
# Build (path, integer class id) rows; the list order fixes the encoding:
# 0=jute, 1=maize, 2=rice, 3=sugarcane, 4=wheat.
class_dirs = [(jutepath, jutefilename),
              (maizepath, maizefilename),
              (ricepath, ricefilename),
              (sugarcanepath, sugarcanefilename),
              (wheatpath, wheatfilename)]
for label, (dirpath, names) in enumerate(class_dirs):
    for fname in names:
        X.append([os.path.join(dirpath, fname), label])
X = pd.DataFrame(X, columns=['path', 'labels'])
In [6]:
# Preview the first rows — all jute (label 0) since jute was appended first.
X.head()
Out[6]:
path labels
0 ../input/agriculture-crop-images/kag2/jute/jut... 0
1 ../input/agriculture-crop-images/kag2/jute/jut... 0
2 ../input/agriculture-crop-images/kag2/jute/jut... 0
3 ../input/agriculture-crop-images/kag2/jute/jut... 0
4 ../input/agriculture-crop-images/kag2/jute/jut... 0
In [7]:
# One-hot encode the integer crop labels into label0..labelN-1 columns.
ohencoder = OneHotEncoder(handle_unknown='ignore',sparse=False)
onehot = ohencoder.fit_transform(X[['labels']])
# Derive the column names from the encoder output instead of hard-coding
# exactly five names, so a change in class count cannot silently misalign
# columns with classes.
ohlabel = pd.DataFrame(onehot, dtype='float64',
                       columns=['label' + str(i) for i in range(onehot.shape[1])])
label_X = X.copy()  # keep the integer-label frame around for reference
X = pd.concat([X, ohlabel], axis=1)
# Frame with path + one-hot columns only (integer labels dropped).
new_X = X.drop(['labels'], axis=1)
In [8]:
# 80/20 train/test split; fixed random_state keeps the split reproducible.
train,test = train_test_split(new_X,test_size=0.2,random_state=32,shuffle = True)
In [9]:
# Split each frame into image paths (X) and one-hot label matrices (y).
X_train = train['path'].to_numpy()
y_train = train.drop(columns=['path']).to_numpy()
X_test = test['path'].to_numpy()
y_test = test.drop(columns=['path']).to_numpy()
In [10]:
def deep_pipeline(data):
    """Read every image path in `data`, scale pixels to [0, 1] and stack
    the results into one (N, 224, 224, 3) float array.

    Assumes each file decodes to a 224x224 RGB image — TODO confirm the
    reshape would raise for any other size.
    """
    images = [plt.imread(path) / 255. for path in data]
    return np.array(images).reshape(-1, 224, 224, 3)
In [11]:
# Load all train/test images into memory as (N, 224, 224, 3) arrays.
dx_train = deep_pipeline(X_train)
dx_test = deep_pipeline(X_test)
In [12]:
keras.backend.clear_session()

# Frozen VGG19 backbone (ImageNet weights, global-average-pooled features)
# feeding a small fully-connected head for the 5 crop classes.
vgg = keras.applications.VGG19(input_shape=(224, 224, 3), include_top=False,
                               weights='imagenet', pooling='avg')
vgg.trainable = False

vggmodel = keras.Sequential([
    vgg,
    Dense(1000, activation='tanh'),
    Dense(1000, activation='tanh'),
    Dense(1000, activation='tanh'),
    Dense(5, activation='softmax'),
])

vggmodel.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
vggmodel.summary()
Downloading data from https://storage.googleapis.com/tensorflow/keras-applications/vgg19/vgg19_weights_tf_dim_ordering_tf_kernels_notop.h5
80142336/80134624 [==============================] - 1s 0us/step
Model: "sequential"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
vgg19 (Model)                (None, 512)               20024384  
_________________________________________________________________
dense (Dense)                (None, 1000)              513000    
_________________________________________________________________
dense_1 (Dense)              (None, 1000)              1001000   
_________________________________________________________________
dense_2 (Dense)              (None, 1000)              1001000   
_________________________________________________________________
dense_3 (Dense)              (None, 5)                 5005      
=================================================================
Total params: 22,544,389
Trainable params: 2,520,005
Non-trainable params: 20,024,384
_________________________________________________________________
In [13]:
# Train the dense head for 50 epochs; 30% of training data held out for validation.
hist = vggmodel.fit(dx_train,y_train,epochs=50,validation_split=0.3,batch_size=16)
Epoch 1/50
28/28 [==============================] - 3s 109ms/step - loss: 3.0200 - accuracy: 0.2780 - val_loss: 1.3661 - val_accuracy: 0.4792
Epoch 2/50
28/28 [==============================] - 2s 76ms/step - loss: 1.2730 - accuracy: 0.5157 - val_loss: 1.2540 - val_accuracy: 0.5417
Epoch 3/50
28/28 [==============================] - 2s 77ms/step - loss: 1.0431 - accuracy: 0.5673 - val_loss: 1.2434 - val_accuracy: 0.5156
Epoch 4/50
28/28 [==============================] - 2s 76ms/step - loss: 0.8738 - accuracy: 0.6839 - val_loss: 1.1318 - val_accuracy: 0.5260
Epoch 5/50
28/28 [==============================] - 2s 77ms/step - loss: 0.7871 - accuracy: 0.6996 - val_loss: 0.6435 - val_accuracy: 0.7500
Epoch 6/50
28/28 [==============================] - 2s 76ms/step - loss: 0.7050 - accuracy: 0.7220 - val_loss: 0.6128 - val_accuracy: 0.7552
Epoch 7/50
28/28 [==============================] - 2s 76ms/step - loss: 0.6688 - accuracy: 0.7803 - val_loss: 0.8669 - val_accuracy: 0.6406
Epoch 8/50
28/28 [==============================] - 2s 77ms/step - loss: 0.4596 - accuracy: 0.8206 - val_loss: 0.5200 - val_accuracy: 0.7760
Epoch 9/50
28/28 [==============================] - 2s 76ms/step - loss: 0.4923 - accuracy: 0.8117 - val_loss: 0.7053 - val_accuracy: 0.7604
Epoch 10/50
28/28 [==============================] - 2s 75ms/step - loss: 0.3556 - accuracy: 0.8632 - val_loss: 0.4693 - val_accuracy: 0.8333
Epoch 11/50
28/28 [==============================] - 2s 76ms/step - loss: 0.5160 - accuracy: 0.8318 - val_loss: 0.9860 - val_accuracy: 0.6875
Epoch 12/50
28/28 [==============================] - 2s 78ms/step - loss: 0.4851 - accuracy: 0.8251 - val_loss: 0.8337 - val_accuracy: 0.7188
Epoch 13/50
28/28 [==============================] - 2s 77ms/step - loss: 0.4954 - accuracy: 0.8139 - val_loss: 1.0346 - val_accuracy: 0.7135
Epoch 14/50
28/28 [==============================] - 2s 77ms/step - loss: 0.3024 - accuracy: 0.9058 - val_loss: 0.4834 - val_accuracy: 0.8333
Epoch 15/50
28/28 [==============================] - 2s 80ms/step - loss: 0.2474 - accuracy: 0.9036 - val_loss: 0.4063 - val_accuracy: 0.8854
Epoch 16/50
28/28 [==============================] - 2s 79ms/step - loss: 0.1173 - accuracy: 0.9619 - val_loss: 0.4649 - val_accuracy: 0.8802
Epoch 17/50
28/28 [==============================] - 2s 76ms/step - loss: 0.1102 - accuracy: 0.9529 - val_loss: 0.4781 - val_accuracy: 0.8854
Epoch 18/50
28/28 [==============================] - 2s 78ms/step - loss: 0.1052 - accuracy: 0.9552 - val_loss: 0.6336 - val_accuracy: 0.8229
Epoch 19/50
28/28 [==============================] - 2s 77ms/step - loss: 0.6009 - accuracy: 0.8318 - val_loss: 1.6065 - val_accuracy: 0.5833
Epoch 20/50
28/28 [==============================] - 2s 76ms/step - loss: 0.6344 - accuracy: 0.7556 - val_loss: 0.6052 - val_accuracy: 0.8438
Epoch 21/50
28/28 [==============================] - 2s 76ms/step - loss: 0.1741 - accuracy: 0.9372 - val_loss: 0.4656 - val_accuracy: 0.8854
Epoch 22/50
28/28 [==============================] - 2s 76ms/step - loss: 0.1283 - accuracy: 0.9484 - val_loss: 0.4183 - val_accuracy: 0.8958
Epoch 23/50
28/28 [==============================] - 2s 78ms/step - loss: 0.1349 - accuracy: 0.9596 - val_loss: 0.8055 - val_accuracy: 0.8021
Epoch 24/50
28/28 [==============================] - 2s 76ms/step - loss: 0.1731 - accuracy: 0.9372 - val_loss: 0.8190 - val_accuracy: 0.8073
Epoch 25/50
28/28 [==============================] - 2s 76ms/step - loss: 0.1497 - accuracy: 0.9552 - val_loss: 0.5631 - val_accuracy: 0.8906
Epoch 26/50
28/28 [==============================] - 2s 78ms/step - loss: 0.1280 - accuracy: 0.9574 - val_loss: 0.7072 - val_accuracy: 0.8333
Epoch 27/50
28/28 [==============================] - 2s 76ms/step - loss: 0.2709 - accuracy: 0.9193 - val_loss: 0.3914 - val_accuracy: 0.9219
Epoch 28/50
28/28 [==============================] - 2s 77ms/step - loss: 0.1652 - accuracy: 0.9305 - val_loss: 0.5742 - val_accuracy: 0.8333
Epoch 29/50
28/28 [==============================] - 2s 77ms/step - loss: 0.0859 - accuracy: 0.9664 - val_loss: 0.4092 - val_accuracy: 0.9115
Epoch 30/50
28/28 [==============================] - 2s 77ms/step - loss: 0.0178 - accuracy: 0.9978 - val_loss: 0.4349 - val_accuracy: 0.9271
Epoch 31/50
28/28 [==============================] - 2s 75ms/step - loss: 0.0105 - accuracy: 1.0000 - val_loss: 0.4285 - val_accuracy: 0.9271
Epoch 32/50
28/28 [==============================] - 2s 77ms/step - loss: 0.0683 - accuracy: 0.9753 - val_loss: 0.6141 - val_accuracy: 0.8698
Epoch 33/50
28/28 [==============================] - 2s 78ms/step - loss: 0.2401 - accuracy: 0.9417 - val_loss: 0.7953 - val_accuracy: 0.8281
Epoch 34/50
28/28 [==============================] - 2s 76ms/step - loss: 0.2661 - accuracy: 0.9148 - val_loss: 0.8167 - val_accuracy: 0.8281
Epoch 35/50
28/28 [==============================] - 2s 76ms/step - loss: 0.0997 - accuracy: 0.9641 - val_loss: 0.5270 - val_accuracy: 0.9115
Epoch 36/50
28/28 [==============================] - 2s 76ms/step - loss: 0.0985 - accuracy: 0.9619 - val_loss: 0.4618 - val_accuracy: 0.9219
Epoch 37/50
28/28 [==============================] - 2s 75ms/step - loss: 0.0264 - accuracy: 0.9933 - val_loss: 0.4260 - val_accuracy: 0.9375
Epoch 38/50
28/28 [==============================] - 2s 77ms/step - loss: 0.0070 - accuracy: 1.0000 - val_loss: 0.4448 - val_accuracy: 0.9271
Epoch 39/50
28/28 [==============================] - 2s 77ms/step - loss: 0.0025 - accuracy: 1.0000 - val_loss: 0.4540 - val_accuracy: 0.9323
Epoch 40/50
28/28 [==============================] - 2s 77ms/step - loss: 0.0022 - accuracy: 1.0000 - val_loss: 0.4548 - val_accuracy: 0.9323
Epoch 41/50
28/28 [==============================] - 2s 77ms/step - loss: 0.0014 - accuracy: 1.0000 - val_loss: 0.4631 - val_accuracy: 0.9323
Epoch 42/50
28/28 [==============================] - 2s 83ms/step - loss: 0.0012 - accuracy: 1.0000 - val_loss: 0.4669 - val_accuracy: 0.9323
Epoch 43/50
28/28 [==============================] - 2s 78ms/step - loss: 0.0011 - accuracy: 1.0000 - val_loss: 0.4708 - val_accuracy: 0.9323
Epoch 44/50
28/28 [==============================] - 2s 76ms/step - loss: 0.0010 - accuracy: 1.0000 - val_loss: 0.4654 - val_accuracy: 0.9323
Epoch 45/50
28/28 [==============================] - 2s 77ms/step - loss: 9.7332e-04 - accuracy: 1.0000 - val_loss: 0.4633 - val_accuracy: 0.9323
Epoch 46/50
28/28 [==============================] - 2s 77ms/step - loss: 8.7667e-04 - accuracy: 1.0000 - val_loss: 0.4779 - val_accuracy: 0.9323
Epoch 47/50
28/28 [==============================] - 2s 76ms/step - loss: 8.4377e-04 - accuracy: 1.0000 - val_loss: 0.4851 - val_accuracy: 0.9323
Epoch 48/50
28/28 [==============================] - 2s 76ms/step - loss: 8.1836e-04 - accuracy: 1.0000 - val_loss: 0.4809 - val_accuracy: 0.9323
Epoch 49/50
28/28 [==============================] - 2s 76ms/step - loss: 7.3665e-04 - accuracy: 1.0000 - val_loss: 0.4744 - val_accuracy: 0.9323
Epoch 50/50
28/28 [==============================] - 2s 75ms/step - loss: 7.0643e-04 - accuracy: 1.0000 - val_loss: 0.4923 - val_accuracy: 0.9271
In [14]:
# Training curves: accuracy/loss on the left, validation metrics on the right.
plt.figure(figsize=(10, 7))

plt.subplot(1, 2, 1)
for key in ('accuracy', 'loss'):
    plt.plot(hist.history[key], label=key)
plt.legend()
plt.title("training set")
plt.grid()

plt.subplot(1, 2, 2)
for key in ('val_accuracy', 'val_loss'):
    plt.plot(hist.history[key], label=key)
plt.legend()
plt.title("validation set")
plt.grid()
plt.ylim((0, 4))
Out[14]:
(0.0, 4.0)
No description has been provided for this image

Test set accuracy¶

In [15]:
# Held-out test-set performance; score is [loss, accuracy].
score = vggmodel.evaluate(dx_test,y_test)
print("accuracy: ", score[1])
5/5 [==============================] - 0s 75ms/step - loss: 0.2725 - accuracy: 0.9375
accuracy:  0.9375
In [16]:
# Softmax probabilities, predicted class ids, and true class ids.
pred = vggmodel.predict(dx_test)
prediction = np.argmax(pred, axis=1)
true = np.argmax(y_test, axis=1)
# Probability the model assigned to its own top choice, per sample.
best_prob = [row[idx] for row, idx in zip(pred, prediction)]
In [17]:
# Confusion matrix of held-out test-set predictions.
plt.figure(figsize = (9,8))
class_label = ['jute','maize','rice','sugarcane','wheat']
# NOTE(review): seaborn's `center` expects a numeric colormap midpoint;
# `center = True` is interpreted as 1 — confirm this is intentional.
fig = sns.heatmap(confusion_matrix(true,prediction),cmap= "coolwarm",annot=True,vmin=0,cbar = False,
            center = True,xticklabels=class_label,yticklabels=class_label)
fig.set_xlabel("Prediction",fontsize=30)
fig.xaxis.set_label_position('top')
fig.set_ylabel("True",fontsize=30)
fig.xaxis.tick_top()
No description has been provided for this image
In [18]:
def deepmodelpipeline(imagepath, model=vggmodel, label=(-1,)):
    """Predict the crop class for one image file and plot it with its title.

    Parameters
    ----------
    imagepath : str
        Path to a 224x224 RGB image file.
    model : keras model, default the global `vggmodel`
        Classifier producing 5-way softmax probabilities.
    label : sequence, default (-1,)
        One-hot true label row. The sentinel (-1,) means "unknown", in which
        case only the prediction is shown.

    Note: the default was changed from the mutable list [-1] to an immutable
    tuple — a shared mutable default is a classic Python pitfall.
    """
    pdict = {0: "jute", 1: "maize", 2: "rice", 3: "sugarcane", 4: "wheat"}
    pred_x = deep_pipeline([imagepath])
    prediction = model.predict(pred_x)
    pred = np.argmax(prediction[0])
    plt.imshow(plt.imread(imagepath))
    if label[0] != -1:
        plt.title("prediction : {0} % {1:.2f} \ntrue        : {2}".format(
            pdict[pred], prediction[0, pred] * 100, pdict[np.argmax(label)]))
    else:
        plt.title("prediction : {0}, % {1:.2f}".format(pdict[pred], prediction[0, pred] * 100))
In [19]:
# Spot-check: a single rice image from the dataset (no true label shown).
deepmodelpipeline('/kaggle/input/agriculture-crop-images/kag2/rice/rice024ahs.jpeg')
No description has been provided for this image
In [20]:
# Spot-check: a single wheat image.
deepmodelpipeline('../input/agriculture-crop-images/kag2/wheat/wheat0004a.jpeg')
No description has been provided for this image
In [21]:
deepmodelpipeline("../input/agriculture-crop-images/kag2/maize/maize008ahf.jpeg")
No description has been provided for this image
In [22]:
deepmodelpipeline("../input/agriculture-crop-images/kag2/jute/jute005a.jpeg")
No description has been provided for this image

Prediction on test images¶

In [23]:
# 4x5 grid of predictions vs true labels for test samples 0..19.
plt.figure(figsize=(20, 20))
for num, (path, label) in enumerate(zip(X_test[0:20], y_test[0:20])):
    plt.subplot(4, 5, num + 1)
    deepmodelpipeline(path, vggmodel, label)
No description has been provided for this image
In [24]:
# 4x5 grid of predictions vs true labels for test samples 20..39.
plt.figure(figsize=(20, 20))
for num, (path, label) in enumerate(zip(X_test[20:40], y_test[20:40])):
    plt.subplot(4, 5, num + 1)
    deepmodelpipeline(path, vggmodel, label)
No description has been provided for this image
In [25]:
# 4x5 grid of predictions vs true labels for test samples 40..59.
plt.figure(figsize=(20, 20))
for num, (path, label) in enumerate(zip(X_test[40:60], y_test[40:60])):
    plt.subplot(4, 5, num + 1)
    deepmodelpipeline(path, vggmodel, label)
No description has been provided for this image
In [26]:
# 4x5 grid of predictions vs true labels for test samples 100..119.
plt.figure(figsize=(20, 20))
for num, (path, label) in enumerate(zip(X_test[100:120], y_test[100:120])):
    plt.subplot(4, 5, num + 1)
    deepmodelpipeline(path, vggmodel, label)
No description has been provided for this image
In [27]:
# 4x5 grid of predictions vs true labels for test samples 140..159.
plt.figure(figsize=(20, 20))
for num, (path, label) in enumerate(zip(X_test[140:160], y_test[140:160])):
    plt.subplot(4, 5, num + 1)
    deepmodelpipeline(path, vggmodel, label)
No description has been provided for this image
In [28]:
vggmodel.save_weights("vggmodelweight.h5")
In [29]:
def resize_image(image_array):
    """Resize an image array to the 224x224 spatial size the model expects."""
    return cv2.resize(image_array,(224,224))
def rescale_image(image_array):
    """Scale raw 0-255 pixel values into the [0, 1] range."""
    return image_array / 255.0
def read_image(image_path):
    """Read an image file from disk into a numpy array via matplotlib."""
    return plt.imread(image_path)
def plot_image(image_array):
    """Display an image; accepts a single (H, W, C) image or a batch of one,
    in which case the first element is shown."""
    try:
        plt.imshow(image_array)
    except TypeError:
        # imshow raises TypeError ("Invalid shape ...") for a 4-D batched
        # array; fall back to its first element. The original bare `except:`
        # would also have swallowed unrelated errors (even KeyboardInterrupt).
        plt.imshow(image_array[0])
def preprocess_image(image_path, reshape=True):
    """Read, resize to 224x224 and rescale one image to [0, 1].

    With reshape=True a leading batch axis is added, giving (1, H, W, C)
    ready for model.predict.
    """
    image = rescale_image(resize_image(read_image(image_path)))
    if reshape == True:
        image = image.reshape((1,) + image.shape)
    return image
def preprocess_imageslist(image_list):
    """Preprocess many image paths into a single (N, 224, 224, 3) array
    (no per-image batch axis)."""
    processed = [preprocess_image(path, reshape=False) for path in image_list]
    return np.array(processed)
def predict_and_plot(image, model):
    """Show a batched image (1, H, W, C) titled with the predicted crop class."""
    class_names = {0: "jute", 1: "maize", 2: "rice", 3: "sugarcane", 4: "wheat"}
    plt.imshow(image[0])
    probs = model.predict(image)
    plt.title(class_names[np.argmax(probs)])
%matplotlib inline
def predict_and_plot5(imagelist, model):
    """Plot up to five images in one row, each titled with its predicted class."""
    class_names = {0: "jute", 1: "maize", 2: "rice", 3: "sugarcane", 4: "wheat"}
    plt.figure(figsize=(20, 10))
    for num, image in enumerate(imagelist):
        plt.subplot(1, 5, num + 1)
        plt.imshow(image)
        probs = model.predict(image.reshape(-1, 224, 224, 3))
        plt.title(class_names[np.argmax(probs)])
def predict_and_plot51(imagelist, model):
    """Plot every image in `imagelist` in rows of five, titled with `model`'s
    predicted class.

    Fixes: the original ignored the `model` argument (always using the global
    `vggmodel`), iterated a hard-coded range(0, 55, 5) regardless of the list
    length (producing empty figures for short lists and truncating long ones),
    and created an unused empty figure plus an unused label dict.
    """
    for i in range(0, len(imagelist), 5):
        predict_and_plot5(imagelist[i:i + 5], model)
def predict_and_plot30(imagelist, model):
    """Plot all images in `imagelist` in rows of five, titled with `model`'s
    predicted class.

    Fixes: the original ignored the `model` argument (always using the global
    `vggmodel`) and created an unused empty figure before delegating.
    """
    for i in range(0, len(imagelist), 5):
        predict_and_plot5(imagelist[i:i + 5], model)
In [30]:
# External test set: CSV of image paths plus crop name and integer label.
testdata = pd.read_csv('../input/testssss/testdata.csv')
In [31]:
# Arrays of image paths and true integer labels for the external test set.
imagepath = testdata['testpath'].values
true_label = testdata['croplabel'].values
testdata.head()
Out[31]:
Unnamed: 0 testpath crop croplabel
0 0 /kaggle/input/testssss/test_crop_image/wheat-f... wheat 4
1 1 /kaggle/input/testssss/test_crop_image/jute-fi... jute 0
2 2 /kaggle/input/testssss/test_crop_image/wheat-c... wheat 4
3 3 /kaggle/input/testssss/test_crop_image/maize00... maize 1
4 4 /kaggle/input/testssss/test_crop_image/rice-fi... rice 2
In [32]:
# Resize and rescale all external test images into one (N, 224, 224, 3) array.
process_image = preprocess_imageslist(imagepath)
In [33]:
# Visualize predictions on the external test images, five per row.
predict_and_plot51(process_image,model=vggmodel)
<Figure size 7200x720 with 0 Axes>
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
In [34]:
# Integer class predictions for the external test set.
test_prediction = np.argmax(vggmodel.predict(process_image),axis=1)
In [35]:
# External-set accuracy — substantially below the held-out split's 0.9375,
# suggesting the external photos differ from the training distribution.
accuracy_score(true_label,test_prediction)
Out[35]:
0.6666666666666666
In [36]:
# Confusion matrix for the external test set.
plt.figure(figsize = (9,8))
class_label = ['jute','maize','rice','sugarcane','wheat']
# NOTE(review): seaborn's `center` expects a numeric colormap midpoint;
# `center = True` is interpreted as 1 — confirm this is intentional.
fig = sns.heatmap(confusion_matrix(true_label,test_prediction),cmap= "coolwarm",annot=True,vmin=0,cbar = False,
            center = True,xticklabels=class_label,yticklabels=class_label)
fig.set_xlabel("Prediction",fontsize=30)
fig.xaxis.set_label_position('top')
fig.set_ylabel("True",fontsize=30)
fig.xaxis.tick_top()
No description has been provided for this image
In [37]:
# Boolean mask of correct predictions on the external test set.
classify = true_label == test_prediction
# Index lists of misclassified / correctly classified images.
wrong_classify = [idx for idx, ok in enumerate(classify) if not ok]
correct_classify = [idx for idx, ok in enumerate(classify) if ok]
In [38]:
# Partition the preprocessed images by classification outcome (fancy indexing).
correct_class_image = process_image[correct_classify]
wrong_class_image = process_image[wrong_classify]
In [39]:
# Inspect the misclassified external images with their (wrong) predictions.
predict_and_plot30(wrong_class_image,vggmodel)
<Figure size 2880x720 with 0 Axes>
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
In [40]:
# Inspect the correctly classified external images.
predict_and_plot30(correct_class_image,vggmodel)
<Figure size 2880x720 with 0 Axes>
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
In [41]:
# Pair each external image path with its predicted label in a two-column frame.
labels = test_prediction.reshape(-1, 1)
imagepath = imagepath.reshape(-1, 1)
combined = np.concatenate([imagepath, labels], axis=1)
df = pd.DataFrame(combined, columns=['pathname', 'label'])
df.head()
Out[41]:
pathname label
0 /kaggle/input/testssss/test_crop_image/wheat-f... 4
1 /kaggle/input/testssss/test_crop_image/jute-fi... 0
2 /kaggle/input/testssss/test_crop_image/wheat-c... 4
3 /kaggle/input/testssss/test_crop_image/maize00... 1
4 /kaggle/input/testssss/test_crop_image/rice-fi... 2
In [42]:
# NOTE(review): this writes the DataFrame index as an extra unnamed column;
# pass index=False if the submission format does not expect it.
df.to_csv('submission.csv')
In [43]:
get_image_path = "../input/testssss/test_crop_image/jute-field.jpg"
In [44]:
# Preprocess (with batch axis) and predict/plot the demo image.
image1 = preprocess_image(get_image_path)
predict_and_plot(image1,model = vggmodel)
No description has been provided for this image